MR like appoach for pivot aggregation

Generate some data

Fields year, month, chapter, volume, name are enum/factor types.

wordCount and userRating are two numeric (integer) fields we care about.


In [18]:
const _ = require('lodash');
const regeneratorRuntime = require('regenerator-runtime-only')

// generate denormalized data

const levels = {
  year : ['2015', '2016'],
  month : ['Jan', 'Feb'],
  chapter : _.range(1, 21).map(i => `Ch. ${i}`),
  volume : ['Vol. I', 'Vol. II', 'Vol. III'],
  name : ['Book A', 'Book B', 'Book C']
};

const measures = {
  wordCount : () => _.random(1000, 2500),
  userRating: () => _.random(1, 5)
}

const datumGen = () => {
  const d1 = _.mapValues(levels, level => _.sample(level));
  const d2 = _.mapValues(measures, m => m());
  return Object.assign(d1, d2);
}

datumGen()


Out[18]:
{ year: '2015',
  month: 'Feb',
  chapter: 'Ch. 7',
  volume: 'Vol. III',
  name: 'Book C',
  wordCount: 2024,
  userRating: 3 }

In [19]:
const rawData = _.range(1000).map(datumGen);
// first 4 rows

const sample = rawData.slice(0, 4);
sample


Out[19]:
[ { year: '2015',
    month: 'Jan',
    chapter: 'Ch. 12',
    volume: 'Vol. II',
    name: 'Book C',
    wordCount: 1159,
    userRating: 4 },
  { year: '2015',
    month: 'Jan',
    chapter: 'Ch. 5',
    volume: 'Vol. I',
    name: 'Book B',
    wordCount: 1410,
    userRating: 3 },
  { year: '2015',
    month: 'Feb',
    chapter: 'Ch. 5',
    volume: 'Vol. I',
    name: 'Book A',
    wordCount: 1457,
    userRating: 4 },
  { year: '2015',
    month: 'Jan',
    chapter: 'Ch. 5',
    volume: 'Vol. III',
    name: 'Book C',
    wordCount: 1441,
    userRating: 5 } ]

"mapper"

Take each row, yield a couple of rows in plain array format, with items sorted in rowFields, colFields, measures order, fields that are not part of these are dropped.


In [51]:
function* runMapper(rows, mapper) {
  for (let row of rows) {
    yield* mapper(row);
  }
}

const pivotMapper = (rowKeys) => (colKeys) => (measureKeys) => function* (d) {
  const rowVals = rowKeys.map(k => d[k]);
  const colVals = colKeys.map(k => d[k]);
  const measureVals = measureKeys.map(k => d[k]);
  for (let k of _.range(rowKeys.length)) {
    let r = rowVals.map((v,i) => (i>k ? null : v));
    yield [...r, ...colVals, ...measureVals]
  }
}

// two row fields, 0 column fields, 1 measure fields
const mapperTest = runMapper(sample, pivotMapper(['name', 'volume'])([])(['wordCount']))
mapperTest


Out[51]:
GeneratorFunctionPrototype { _invoke: [Function: invoke] }

Sorting

Sort by: row field 0, row field 1, ... , row field m, col field 1, col field 2, ... col field n


In [52]:
function keyBasedComparator(keys) {
  return (a,b) => {
    for(let key of keys) {
      if(a[key] === b[key]) continue;
      if(a[key] === null) return -1;
      if(b[key] === null) return 1;
      if(a[key] > b[key]) return 1;
      if(a[key] < b[key]) return -1;
    }
    return 0;
  }
}


function iterToArray(it) {
  if (Array.isArray(it)) return it;
  let array = [];
  for(let x of it) {
    array.push(x);
  }
  return array;
}

function sortMapperOutput(output, keys) {
  output = iterToArray(output);
  output.sort(keyBasedComparator(keys));
  return output;
}

const sampleSortKeys = [0, 1]; 
const mapperTestOutput = iterToArray(mapperTest);
const sortedMapperOutput = sortMapperOutput(mapperTestOutput, sampleSortKeys);
sortedMapperOutput


Out[52]:
[ [ 'Book A', null, 1457 ],
  [ 'Book A', 'Vol. I', 1457 ],
  [ 'Book B', null, 1410 ],
  [ 'Book B', 'Vol. I', 1410 ],
  [ 'Book C', null, 1159 ],
  [ 'Book C', null, 1441 ],
  [ 'Book C', 'Vol. II', 1159 ],
  [ 'Book C', 'Vol. III', 1441 ] ]

Grouping

Sorted data are much easier to group by, which can be done in a single iteration.

Grouping key is: rowField + colField - by unique values.


In [49]:
function* groupBySorted(iterable, keys) {
  let lastX = null;
  let buffer = [];
  let compare = keyBasedComparator(keys);
  for( let x of iterable) {
    if(!lastX) {
      buffer.push(x);
    } else {
      const sameGroup = compare(x, lastX);
      if (sameGroup === 0) {
        buffer.push(x);
      } else {
        yield buffer;
        buffer = [x];
      }
    }
    lastX = x;
  }
  if(buffer.length) yield buffer;
}

const groupedData = iterToArray(
  groupBySorted(sortedMapperOutput, sampleSortKeys)
)
groupedData


Out[49]:
[ [ [ 'Book A', null, 1457 ] ],
  [ [ 'Book A', 'Vol. I', 1457 ] ],
  [ [ 'Book B', null, 1410 ] ],
  [ [ 'Book B', 'Vol. I', 1410 ] ],
  [ [ 'Book C', null, 1159 ], [ 'Book C', null, 1441 ] ],
  [ [ 'Book C', 'Vol. II', 1159 ] ],
  [ [ 'Book C', 'Vol. III', 1441 ] ] ]

"reducer"

For each group, a supplied reduce function and initial value is run on the group. For simple sum, reducer is sum, and seed is 0.


In [50]:
function* reduceWithKey(iterable, fn, seed) {
  for(let group of iterable) {
    yield group.reduce(fn, seed);
  }
}

function aggregateWordCount(acc, nextRow) {
  const key = _.initial(nextRow);
  const value = _.last(nextRow);
  acc = _.last(acc);
  return [...key, acc+value];
}

const pivotResults = reduceWithKey(groupedData, aggregateWordCount, [null, null, 0]);
iterToArray(
  pivotResults
)


Out[50]:
[ [ 'Book A', null, 1457 ],
  [ 'Book A', 'Vol. I', 1457 ],
  [ 'Book B', null, 1410 ],
  [ 'Book B', 'Vol. I', 1410 ],
  [ 'Book C', null, 2600 ],
  [ 'Book C', 'Vol. II', 1159 ],
  [ 'Book C', 'Vol. III', 1441 ] ]

Full example on 1000-row table

  • Row fields: name, volume
  • Col fields: year, month
  • Measures: sum(wordCount), avg(userRating)

In [72]:
const rowFields = ["name", "volume"];
const colFields = ["year", "month"];
const measures = ["wordCount", "userRating"];

const mapperStep = 
      runMapper(
        rawData, 
        pivotMapper(rowFields)(colFields)(measures)
      );

// sort and group by row and col fields
const sortKeys = _.range(rowFields.length + colFields.length);
const rowFieldsKeys = _.range(rowFields.length);

const sortStep = sortMapperOutput(mapperStep, sortKeys);
const groupStep = groupBySorted(sortStep, sortKeys);


Out[72]:
'use strict'

In [73]:
const empty = [null, null, null, null, 0, { sum: 0, n: 0 }]; // reducer seed

const keyLen = rowFields.length + colFields.length;

// sum word count, average userRating
function aggregate(acc, nextRow) {
  const key = nextRow.slice(0, keyLen);
  const value = nextRow.slice(keyLen);
  acc = acc.slice(keyLen);
  const [accWordCount, accUserRating] = acc;
  const [nextWordCount, nextUserRating] = value;
  
  const userRating = {
    sum : accUserRating.sum + nextUserRating,
    n : accUserRating.n + 1
  };
  
  return [...key, accWordCount+nextWordCount, userRating ];
}

const results = reduceWithKey(groupStep, aggregate, empty);


Out[73]:
'use strict'

In [75]:
// compute average after aggregation
const pivoted = iterToArray(results).map(r => {
  const key = r.slice(0, keyLen);
  const value = r.slice(keyLen);
  const [wordCount, userRating] = value;
  // divide sum / count => average
  return [ ...key, wordCount, userRating.sum / userRating.n]
});


Out[75]:
'use strict'

Stich together API response.


In [79]:
const data = {
  headers: [...rowFields, ...colFields, ...measures],
  values : pivoted
}
data


Out[79]:
{ headers: [ 'name', 'volume', 'year', 'month', 'wordCount', 'userRating' ],
  values: 
   [ [ 'Book A', null, '2015', 'Feb', 166397, 2.9450549450549453 ],
     [ 'Book A', null, '2015', 'Jan', 144552, 2.975609756097561 ],
     [ 'Book A', null, '2016', 'Feb', 143717, 2.988235294117647 ],
     [ 'Book A', null, '2016', 'Jan', 160766, 3.1123595505617976 ],
     [ 'Book A', 'Vol. I', '2015', 'Feb', 54594, 3.1379310344827585 ],
     [ 'Book A', 'Vol. I', '2015', 'Jan', 64410, 3 ],
     [ 'Book A', 'Vol. I', '2016', 'Feb', 54305, 3.032258064516129 ],
     [ 'Book A', 'Vol. I', '2016', 'Jan', 53963, 3.3333333333333335 ],
     [ 'Book A', 'Vol. II', '2015', 'Feb', 55915, 3.0625 ],
     [ 'Book A', 'Vol. II', '2015', 'Jan', 38897, 3.1818181818181817 ],
     [ 'Book A', 'Vol. II', '2016', 'Feb', 43631, 3.2222222222222223 ],
     [ 'Book A', 'Vol. II', '2016', 'Jan', 58896, 2.774193548387097 ],
     [ 'Book A', 'Vol. III', '2015', 'Feb', 55888, 2.6333333333333333 ],
     [ 'Book A', 'Vol. III', '2015', 'Jan', 41245, 2.75 ],
     [ 'Book A', 'Vol. III', '2016', 'Feb', 45781, 2.7037037037037037 ],
     [ 'Book A', 'Vol. III', '2016', 'Jan', 47907, 3.25 ],
     [ 'Book B', null, '2015', 'Feb', 128007, 3.140845070422535 ],
     [ 'Book B', null, '2015', 'Jan', 163561, 3.056179775280899 ],
     [ 'Book B', null, '2016', 'Feb', 141132, 2.9146341463414633 ],
     [ 'Book B', null, '2016', 'Jan', 129437, 3.051948051948052 ],
     [ 'Book B', 'Vol. I', '2015', 'Feb', 50434, 3.357142857142857 ],
     [ 'Book B', 'Vol. I', '2015', 'Jan', 44402, 3.16 ],
     [ 'Book B', 'Vol. I', '2016', 'Feb', 66224, 2.8947368421052633 ],
     [ 'Book B', 'Vol. I', '2016', 'Jan', 39813, 3.260869565217391 ],
     [ 'Book B', 'Vol. II', '2015', 'Feb', 35776, 3.4 ],
     [ 'Book B', 'Vol. II', '2015', 'Jan', 60181, 3.09375 ],
     [ 'Book B', 'Vol. II', '2016', 'Feb', 39140, 2.909090909090909 ],
     [ 'Book B', 'Vol. II', '2016', 'Jan', 55463, 3.0588235294117645 ],
     [ 'Book B', 'Vol. III', '2015', 'Feb', 41797, 2.652173913043478 ],
     [ 'Book B', 'Vol. III', '2015', 'Jan', 58978, 2.9375 ],
     [ 'Book B', 'Vol. III', '2016', 'Feb', 35768, 2.9545454545454546 ],
     [ 'Book B', 'Vol. III', '2016', 'Jan', 34161, 2.8 ],
     [ 'Book C', null, '2015', 'Feb', 146356, 2.9080459770114944 ],
     [ 'Book C', null, '2015', 'Jan', 128778, 3.0277777777777777 ],
     [ 'Book C', null, '2016', 'Feb', 176535, 2.742857142857143 ],
     [ 'Book C', null, '2016', 'Jan', 127342, 2.7857142857142856 ],
     [ 'Book C', 'Vol. I', '2015', 'Feb', 48151, 3.1333333333333333 ],
     [ 'Book C', 'Vol. I', '2015', 'Jan', 38198, 3.1818181818181817 ],
     [ 'Book C', 'Vol. I', '2016', 'Feb', 51384, 3.033333333333333 ],
     [ 'Book C', 'Vol. I', '2016', 'Jan', 34174, 2.210526315789474 ],
     [ 'Book C', 'Vol. II', '2015', 'Feb', 55751, 2.7 ],
     [ 'Book C', 'Vol. II', '2015', 'Jan', 43037, 2.9583333333333335 ],
     [ 'Book C', 'Vol. II', '2016', 'Feb', 50025, 2.6129032258064515 ],
     [ 'Book C', 'Vol. II', '2016', 'Jan', 43721, 2.9166666666666665 ],
     [ 'Book C', 'Vol. III', '2015', 'Feb', 42454, 2.888888888888889 ],
     [ 'Book C', 'Vol. III', '2015', 'Jan', 47543, 2.9615384615384617 ],
     [ 'Book C', 'Vol. III', '2016', 'Feb', 75126, 2.6363636363636362 ],
     [ 'Book C', 'Vol. III', '2016', 'Jan', 49447, 3.074074074074074 ] ] }

Additional Steps to make pivot data UI friendly.

WARNING: messy code ahead...., skip to the end to see JSON results.


In [97]:
const rowIndices = _.range(rowFields.length);
const colIndices = _.range(colFields.length).map(i => i + rowFields.length);
const measureIndices = _.range(measures.length)
                        .map(i => i + rowFields.length + colFields.length);
const uniqColValues = colIndices.map(k => {
  const getter = r => r[k];
  return _.uniqBy(data.values, getter).map(getter);
});
uniqColValues


Out[97]:
[ [ '2015', '2016' ], [ 'Feb', 'Jan' ] ]

In [103]:
const cartesianProduct = (...collections) => {
  collections.reverse();
  const products = collections.reduce( // ? reduceRight
    (tuples, vals) => (tuples ? 
      _.flatMap(vals, v => tuples.map(t => t.concat([v]))) :
      vals.map(v => [v])),
    null
  ).map(t => { t.reverse(); return t; })
  return products;
}

const uniqColCombinations = cartesianProduct(...uniqColValues);
uniqColCombinations


Out[103]:
[ [ '2015', 'Feb' ],
  [ '2015', 'Jan' ],
  [ '2016', 'Feb' ],
  [ '2016', 'Jan' ] ]

In [118]:
const makeMapFromPairs = (pairs) => {
  const map = new Map();
  for(let p of pairs) {
    const [key, value] = p;
    map.set(key, value);
    // map.set(...p);
  }
  return map;
}

const nMeasures = measures.length;
const nPivotColumns = nMeasures * uniqColCombinations.length;

const pivotIndicesMapping = makeMapFromPairs(
  _.zip(
    uniqColCombinations.map(comb => comb.join(",")),
    _.range(0, uniqColCombinations.length*nMeasures, nMeasures)
  )
)
pivotIndicesMapping


Out[118]:
Map {
  '2015,Feb' => 0,
  '2015,Jan' => 2,
  '2016,Feb' => 4,
  '2016,Jan' => 6 }

In [119]:
const takeFromIndices = (arr, indices) => {
  return indices.map(j => arr[j]);
}

const mergeByRow = (hashMap, nextRow) => {
  if(!hashMap) {
    hashMap = new Map();
  } else {
    hashMap = hashMap[1];
  }
  const colKey = takeFromIndices(nextRow, colIndices).join(",");
  const value = takeFromIndices(nextRow, measureIndices);
  hashMap.set(colKey, value);
  return [takeFromIndices(nextRow, rowIndices), hashMap];
}

const groupByRow = groupBySorted(data.values, _.range(rowFields.length));
const tallyed = iterToArray( reduceWithKey(groupByRow, mergeByRow, null) );

tallyed.forEach(v => console.log(v));
""


[ [ 'Book A', null ],
  Map {
    '2015,Feb' => [ 166397, 2.9450549450549453 ],
    '2015,Jan' => [ 144552, 2.975609756097561 ],
    '2016,Feb' => [ 143717, 2.988235294117647 ],
    '2016,Jan' => [ 160766, 3.1123595505617976 ] } ]
[ [ 'Book A', 'Vol. I' ],
  Map {
    '2015,Feb' => [ 54594, 3.1379310344827585 ],
    '2015,Jan' => [ 64410, 3 ],
    '2016,Feb' => [ 54305, 3.032258064516129 ],
    '2016,Jan' => [ 53963, 3.3333333333333335 ] } ]
[ [ 'Book A', 'Vol. II' ],
  Map {
    '2015,Feb' => [ 55915, 3.0625 ],
    '2015,Jan' => [ 38897, 3.1818181818181817 ],
    '2016,Feb' => [ 43631, 3.2222222222222223 ],
    '2016,Jan' => [ 58896, 2.774193548387097 ] } ]
[ [ 'Book A', 'Vol. III' ],
  Map {
    '2015,Feb' => [ 55888, 2.6333333333333333 ],
    '2015,Jan' => [ 41245, 2.75 ],
    '2016,Feb' => [ 45781, 2.7037037037037037 ],
    '2016,Jan' => [ 47907, 3.25 ] } ]
[ [ 'Book B', null ],
  Map {
    '2015,Feb' => [ 128007, 3.140845070422535 ],
    '2015,Jan' => [ 163561, 3.056179775280899 ],
    '2016,Feb' => [ 141132, 2.9146341463414633 ],
    '2016,Jan' => [ 129437, 3.051948051948052 ] } ]
[ [ 'Book B', 'Vol. I' ],
  Map {
    '2015,Feb' => [ 50434, 3.357142857142857 ],
    '2015,Jan' => [ 44402, 3.16 ],
    '2016,Feb' => [ 66224, 2.8947368421052633 ],
    '2016,Jan' => [ 39813, 3.260869565217391 ] } ]
[ [ 'Book B', 'Vol. II' ],
  Map {
    '2015,Feb' => [ 35776, 3.4 ],
    '2015,Jan' => [ 60181, 3.09375 ],
    '2016,Feb' => [ 39140, 2.909090909090909 ],
    '2016,Jan' => [ 55463, 3.0588235294117645 ] } ]
[ [ 'Book B', 'Vol. III' ],
  Map {
    '2015,Feb' => [ 41797, 2.652173913043478 ],
    '2015,Jan' => [ 58978, 2.9375 ],
    '2016,Feb' => [ 35768, 2.9545454545454546 ],
    '2016,Jan' => [ 34161, 2.8 ] } ]
[ [ 'Book C', null ],
  Map {
    '2015,Feb' => [ 146356, 2.9080459770114944 ],
    '2015,Jan' => [ 128778, 3.0277777777777777 ],
    '2016,Feb' => [ 176535, 2.742857142857143 ],
    '2016,Jan' => [ 127342, 2.7857142857142856 ] } ]
[ [ 'Book C', 'Vol. I' ],
  Map {
    '2015,Feb' => [ 48151, 3.1333333333333333 ],
    '2015,Jan' => [ 38198, 3.1818181818181817 ],
    '2016,Feb' => [ 51384, 3.033333333333333 ],
    '2016,Jan' => [ 34174, 2.210526315789474 ] } ]
[ [ 'Book C', 'Vol. II' ],
  Map {
    '2015,Feb' => [ 55751, 2.7 ],
    '2015,Jan' => [ 43037, 2.9583333333333335 ],
    '2016,Feb' => [ 50025, 2.6129032258064515 ],
    '2016,Jan' => [ 43721, 2.9166666666666665 ] } ]
[ [ 'Book C', 'Vol. III' ],
  Map {
    '2015,Feb' => [ 42454, 2.888888888888889 ],
    '2015,Jan' => [ 47543, 2.9615384615384617 ],
    '2016,Feb' => [ 75126, 2.6363636363636362 ],
    '2016,Jan' => [ 49447, 3.074074074074074 ] } ]
Out[119]:
''

In [134]:
const pivotTable = tallyed.map(([rowKey, valueMap]) => {
  const pivotRow = new Array(nPivotColumns);
  for (let [colKey, mVals] of valueMap.entries()) {
    const istart = pivotIndicesMapping.get(colKey);
    const iend = istart + nMeasures;
    for(let i of _.range(istart, iend)) {
      pivotRow[i] = mVals[i - istart];
    }
  }
  return [...rowKey, ...pivotRow];
})
const pivotHeaders = _.flatMap(uniqColCombinations, comb => (
  measures.map(k=> [...comb, k])
))
const betterData = {
  headers: [...rowFields, ...pivotHeaders],
  values: pivotTable
}
betterData


Out[134]:
{ headers: 
   [ 'name',
     'volume',
     [ '2015', 'Feb', 'wordCount' ],
     [ '2015', 'Feb', 'userRating' ],
     [ '2015', 'Jan', 'wordCount' ],
     [ '2015', 'Jan', 'userRating' ],
     [ '2016', 'Feb', 'wordCount' ],
     [ '2016', 'Feb', 'userRating' ],
     [ '2016', 'Jan', 'wordCount' ],
     [ '2016', 'Jan', 'userRating' ] ],
  values: 
   [ [ 'Book A',
       null,
       166397,
       2.9450549450549453,
       144552,
       2.975609756097561,
       143717,
       2.988235294117647,
       160766,
       3.1123595505617976 ],
     [ 'Book A',
       'Vol. I',
       54594,
       3.1379310344827585,
       64410,
       3,
       54305,
       3.032258064516129,
       53963,
       3.3333333333333335 ],
     [ 'Book A',
       'Vol. II',
       55915,
       3.0625,
       38897,
       3.1818181818181817,
       43631,
       3.2222222222222223,
       58896,
       2.774193548387097 ],
     [ 'Book A',
       'Vol. III',
       55888,
       2.6333333333333333,
       41245,
       2.75,
       45781,
       2.7037037037037037,
       47907,
       3.25 ],
     [ 'Book B',
       null,
       128007,
       3.140845070422535,
       163561,
       3.056179775280899,
       141132,
       2.9146341463414633,
       129437,
       3.051948051948052 ],
     [ 'Book B',
       'Vol. I',
       50434,
       3.357142857142857,
       44402,
       3.16,
       66224,
       2.8947368421052633,
       39813,
       3.260869565217391 ],
     [ 'Book B',
       'Vol. II',
       35776,
       3.4,
       60181,
       3.09375,
       39140,
       2.909090909090909,
       55463,
       3.0588235294117645 ],
     [ 'Book B',
       'Vol. III',
       41797,
       2.652173913043478,
       58978,
       2.9375,
       35768,
       2.9545454545454546,
       34161,
       2.8 ],
     [ 'Book C',
       null,
       146356,
       2.9080459770114944,
       128778,
       3.0277777777777777,
       176535,
       2.742857142857143,
       127342,
       2.7857142857142856 ],
     [ 'Book C',
       'Vol. I',
       48151,
       3.1333333333333333,
       38198,
       3.1818181818181817,
       51384,
       3.033333333333333,
       34174,
       2.210526315789474 ],
     [ 'Book C',
       'Vol. II',
       55751,
       2.7,
       43037,
       2.9583333333333335,
       50025,
       2.6129032258064515,
       43721,
       2.9166666666666665 ],
     [ 'Book C',
       'Vol. III',
       42454,
       2.888888888888889,
       47543,
       2.9615384615384617,
       75126,
       2.6363636363636362,
       49447,
       3.074074074074074 ] ] }

Extras

Enough of half-assed functional javascript, now, make a tree out of column headers - using half-assed OOP:


In [171]:
class Tree {
  constructor(nodeData, children) {
    if(nodeData instanceof Tree) {
      return nodeData;
    }
    this.nodeData = _.isUndefined(nodeData) ? null : nodeData;
    this.childNodes = children || {};
  }
  isLeaf() {
    return _.size(this.childNodes) === 0;
  }
  merge(otherTree) {
    const nodeData = otherTree.nodeData || this.nodeData;
    const childrenKeys = _.union(
      Object.keys(this.childNodes), 
      Object.keys(otherTree.childNodes)
    );
    const children = childrenKeys.map(k => {
      const t1 = new Tree(this.childNodes[k]);
      const t2 = new Tree(otherTree.childNodes[k]);
      return [k, t1.merge(t2)]
    });
    return new Tree(nodeData, _.fromPairs(children));
  }
}
Tree.fromPath = function(pathArray, leafData) {
  return pathArray.reduceRight((child, pathPart) => {
    if(!pathPart) {
      return child;
    }
    if(!child) {
      const leaf = new Tree(leafData);
      return new Tree(null, { [pathPart] : leaf });
    } 
    return new Tree(null, { [pathPart]: child });
  }, null);
}
null


Out[171]:
null

In [172]:
console.log(JSON.stringify(
  uniqColCombinations
    .map(Tree.fromPath)
    .reduce((a,b) => a.merge(b), new Tree()),
  null,
  2
))


{
  "nodeData": null,
  "childNodes": {
    "2015": {
      "nodeData": null,
      "childNodes": {
        "Feb": {
          "nodeData": null,
          "childNodes": {}
        },
        "Jan": {
          "nodeData": 1,
          "childNodes": {}
        }
      }
    },
    "2016": {
      "nodeData": null,
      "childNodes": {
        "Feb": {
          "nodeData": 2,
          "childNodes": {}
        },
        "Jan": {
          "nodeData": 3,
          "childNodes": {}
        }
      }
    }
  }
}
Out[172]:
undefined

Same stuff can be used to make a tree out of row hierarchy:


In [175]:
const pivotRowTreeValues = data.values.map(r => r.slice(0, rowFields.length));
console.log(JSON.stringify(
  pivotRowTreeValues
    .map(Tree.fromPath)
    .reduce((a,b) => a.merge(b), new Tree()),
  null,
  2
));


{
  "nodeData": null,
  "childNodes": {
    "Book A": {
      "nodeData": 3,
      "childNodes": {
        "Vol. I": {
          "nodeData": 7,
          "childNodes": {}
        },
        "Vol. II": {
          "nodeData": 11,
          "childNodes": {}
        },
        "Vol. III": {
          "nodeData": 15,
          "childNodes": {}
        }
      }
    },
    "Book B": {
      "nodeData": 19,
      "childNodes": {
        "Vol. I": {
          "nodeData": 23,
          "childNodes": {}
        },
        "Vol. II": {
          "nodeData": 27,
          "childNodes": {}
        },
        "Vol. III": {
          "nodeData": 31,
          "childNodes": {}
        }
      }
    },
    "Book C": {
      "nodeData": 35,
      "childNodes": {
        "Vol. I": {
          "nodeData": 39,
          "childNodes": {}
        },
        "Vol. II": {
          "nodeData": 43,
          "childNodes": {}
        },
        "Vol. III": {
          "nodeData": 47,
          "childNodes": {}
        }
      }
    }
  }
}
Out[175]:
undefined